.endm
- .macro save_x4_to_x29_sp_el0
- stp x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
- stp x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
- stp x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
- stp x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
- stp x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
- stp x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
- stp x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
- stp x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
- stp x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
- stp x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
- stp x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
- stp x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
- stp x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
- mrs x18, sp_el0
- str x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
- .endm
-
-
vector_base runtime_exceptions
/* ---------------------------------------------------------------------
*
* Save x4-x29 and sp_el0.
*/
- save_x4_to_x29_sp_el0
+ stp x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
+ stp x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
+ stp x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
+ stp x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
+ stp x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
+ stp x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
+ stp x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
+ stp x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
+ stp x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
+ stp x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
+ stp x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
+ stp x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
+ stp x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
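+ /* x18 is used as a scratch register here; its value has already been saved to the context above */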
+ mrs x18, sp_el0
+ str x18, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
mov x5, xzr
mov x6, sp
smc_unknown:
/*
- * Here we restore x4-x18 regardless of where we came from. AArch32
- * callers will find the registers contents unchanged, but AArch64
- * callers will find the registers modified (with stale earlier NS
- * content). Either way, we aren't leaking any secure information
- * through them.
+ * Unknown SMC call. Populate return value with SMC_UNK, restore
+ * GP registers, and return to caller.
*/
mov x0, #SMC_UNK
- b restore_gp_registers_callee_eret
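+ /* Store SMC_UNK into the saved context so that the restore path returns it in x0 */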
+ str x0, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
+ b restore_gp_registers_eret
smc_prohibited:
ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
/*
- * Copyright (c) 2013-2017, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2013-2018, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
.global fpregs_context_restore
#endif
.global save_gp_registers
+ .global restore_gp_registers
.global restore_gp_registers_eret
- .global restore_gp_registers_callee_eret
.global el3_exit
/* -----------------------------------------------------
ret
endfunc save_gp_registers
-func restore_gp_registers_eret
+/*
+ * This function restores all general purpose registers except x30 from the
+ * CPU context. The x30 register must be explicitly restored by the caller.
+ */
+func restore_gp_registers
ldp x0, x1, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X0]
ldp x2, x3, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X2]
- b restore_gp_registers_callee_eret
-endfunc restore_gp_registers_eret
-
-func restore_gp_registers_callee_eret
ldp x4, x5, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X4]
ldp x6, x7, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X6]
ldp x8, x9, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X8]
ldp x10, x11, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X10]
ldp x12, x13, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X12]
ldp x14, x15, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X14]
+ ldp x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
ldp x18, x19, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X18]
ldp x20, x21, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X20]
ldp x22, x23, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X22]
ldp x24, x25, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X24]
ldp x26, x27, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X26]
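+ /* Restore SP_EL0 using x28 as a temporary; x28 itself is reloaded from the context below */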
+ ldr x28, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_SP_EL0]
+ msr sp_el0, x28
ldp x28, x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X28]
- ldp x30, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
- msr sp_el0, x17
- ldp x16, x17, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X16]
+ ret
+endfunc restore_gp_registers
+
+/*
+ * Restore general purpose registers (including x30) and exit EL3 via ERET to
+ * a lower exception level.
+ */
+func restore_gp_registers_eret
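+ /* The bl clobbers x30, which is reloaded from the saved context before the ERET */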
+ bl restore_gp_registers
+ ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
eret
-endfunc restore_gp_registers_callee_eret
+endfunc restore_gp_registers_eret
/* -----------------------------------------------------
* This routine assumes that the SP_EL3 is pointing to